Use $wgUpdateRowsPerQuery instead, and read its value at run time
rather than storing it in the job parameters. This also helps with
job deduplication if the batch size is changed.
Change-Id: Ifef25a3ae5ae2418359a41eba05778613fbb548f
/**
* @param Title|null $title Not used by this job.
* @param array $params
/**
* @param Title|null $title Not used by this job.
* @param array $params
- * - batchSize, Number of watchlist entries to remove at once.
* - userId, The ID for the user whose watchlist is being cleared.
* - maxWatchlistId, The maximum wl_id at the time the job was first created,
*/
public function __construct( Title $title = null, array $params ) {
* - userId, The ID for the user whose watchlist is being cleared.
* - maxWatchlistId, The maximum wl_id at the time the job was first created,
*/
public function __construct( Title $title = null, array $params ) {
- if ( !array_key_exists( 'batchSize', $params ) ) {
- $params['batchSize'] = 1000;
- }
-
parent::__construct(
'clearUserWatchlist',
SpecialPage::getTitleFor( 'EditWatchlist', 'clear' ),
parent::__construct(
'clearUserWatchlist',
SpecialPage::getTitleFor( 'EditWatchlist', 'clear' ),
}
public function run() {
}
public function run() {
+ global $wgUpdateRowsPerQuery;
$userId = $this->params['userId'];
$maxWatchlistId = $this->params['maxWatchlistId'];
$userId = $this->params['userId'];
$maxWatchlistId = $this->params['maxWatchlistId'];
+ $batchSize = $wgUpdateRowsPerQuery;
$loadBalancer = MediaWikiServices::getInstance()->getDBLoadBalancer();
$dbw = $loadBalancer->getConnection( DB_MASTER );
$loadBalancer = MediaWikiServices::getInstance()->getDBLoadBalancer();
$dbw = $loadBalancer->getConnection( DB_MASTER );
__METHOD__,
[
'ORDER BY' => 'wl_id ASC',
__METHOD__,
[
'ORDER BY' => 'wl_id ASC',
- 'LIMIT' => $this->params['batchSize'],
$lbf->commitMasterChanges( __METHOD__ );
unset( $scopedLock );
$lbf->commitMasterChanges( __METHOD__ );
unset( $scopedLock );
- if ( count( $watchlistIds ) == $this->params['batchSize'] ) {
+ if ( count( $watchlistIds ) === (int)$batchSize ) {
+ // Until we get less results than the limit, recursively push
+ // the same job again.
JobQueueGroup::singleton()->push( new self( $this->getTitle(), $this->getParams() ) );
}
JobQueueGroup::singleton()->push( new self( $this->getTitle(), $this->getParams() ) );
}
$watchedItemStore->addWatch( $user, new TitleValue( 0, 'C' ) );
$watchedItemStore->addWatch( $user, new TitleValue( 1, 'C' ) );
$watchedItemStore->addWatch( $user, new TitleValue( 0, 'C' ) );
$watchedItemStore->addWatch( $user, new TitleValue( 1, 'C' ) );
+ $this->setMwGlobals( 'wgUpdateRowsPerQuery', 2 );
+
JobQueueGroup::singleton()->push(
new ClearUserWatchlistJob(
null,
[
'userId' => $user->getId(),
JobQueueGroup::singleton()->push(
new ClearUserWatchlistJob(
null,
[
'userId' => $user->getId(),
'maxWatchlistId' => $maxId,
]
)
'maxWatchlistId' => $maxId,
]
)